We have two datasets with composite properties: X_bp and X_nup. They must be combined with an INNER join; this is the ETL step: extract, clean, and merge the data. We start with the required imports:
import numpy as np  # matrix operations
np.set_printoptions(precision=3, suppress=True)
import pandas as pd  # data loading
import matplotlib.pyplot as plt  # plotting
import seaborn as sns  # statistical plots
from sklearn.preprocessing import MinMaxScaler, LabelEncoder  # scaling and label encoding
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score, mean_absolute_error
from sklearn.neighbors import KNeighborsRegressor
from sklearn.svm import SVR
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.tree import DecisionTreeRegressor
from pandas.plotting import scatter_matrix
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
df_bp = pd.read_excel(r"C:\Users\juiry\Documents\VKR\X_bp.xlsx", index_col=0)
df_nup = pd.read_excel(r"C:\Users\juiry\Documents\VKR\X_nup.xlsx", index_col=0)
df = df_bp.merge(df_nup, left_index=True, right_index=True, how='inner')
df
| | Соотношение матрица-наполнитель | Плотность, кг/м3 | модуль упругости, ГПа | Количество отвердителя, м.% | Содержание эпоксидных групп,%_2 | Температура вспышки, С_2 | Поверхностная плотность, г/м2 | Модуль упругости при растяжении, ГПа | Прочность при растяжении, МПа | Потребление смолы, г/м2 | Угол нашивки, град | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0.0 | 1.857143 | 2030.000000 | 738.736842 | 30.000000 | 22.267857 | 100.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 57.000000 |
| 1.0 | 1.857143 | 2030.000000 | 738.736842 | 50.000000 | 23.750000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 60.000000 |
| 2.0 | 1.857143 | 2030.000000 | 738.736842 | 49.900000 | 33.000000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 70.000000 |
| 3.0 | 1.857143 | 2030.000000 | 738.736842 | 129.000000 | 21.250000 | 300.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 47.000000 |
| 4.0 | 2.771331 | 2030.000000 | 753.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 57.000000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1018.0 | 2.271346 | 1952.087902 | 912.855545 | 86.992183 | 20.123249 | 324.774576 | 209.198700 | 73.090961 | 2387.292495 | 125.007669 | 90.0 | 9.076380 | 47.019770 |
| 1019.0 | 3.444022 | 2050.089171 | 444.732634 | 145.981978 | 19.599769 | 254.215401 | 350.660830 | 72.920827 | 2360.392784 | 117.730099 | 90.0 | 10.565614 | 53.750790 |
| 1020.0 | 3.280604 | 1972.372865 | 416.836524 | 110.533477 | 23.957502 | 248.423047 | 740.142791 | 74.734344 | 2662.906040 | 236.606764 | 90.0 | 4.161154 | 67.629684 |
| 1021.0 | 3.705351 | 2066.799773 | 741.475517 | 141.397963 | 19.246945 | 275.779840 | 641.468152 | 74.042708 | 2071.715856 | 197.126067 | 90.0 | 6.313201 | 58.261074 |
| 1022.0 | 3.808020 | 1890.413468 | 417.316232 | 129.183416 | 27.474763 | 300.952708 | 758.747882 | 74.309704 | 2856.328932 | 194.754342 | 90.0 | 6.078902 | 77.434468 |
1023 rows × 13 columns
df.shape
(1023, 13)
df.isna().sum()
Соотношение матрица-наполнитель         0
Плотность, кг/м3                        0
модуль упругости, ГПа                   0
Количество отвердителя, м.%             0
Содержание эпоксидных групп,%_2         0
Температура вспышки, С_2                0
Поверхностная плотность, г/м2           0
Модуль упругости при растяжении, ГПа    0
Прочность при растяжении, МПа           0
Потребление смолы, г/м2                 0
Угол нашивки, град                      0
Шаг нашивки                             0
Плотность нашивки                       0
dtype: int64
sns.heatmap(df.isnull(), yticklabels=False, cbar=False, cmap='viridis');
Conclusion: there are no missing values in the data.
le = LabelEncoder()
df['Угол нашивки, град'] = le.fit_transform(df['Угол нашивки, град'])
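For a binary feature such as the stitching angle, which takes only the values 0 and 90, LabelEncoder maps the categories to 0 and 1 while keeping the originals recoverable; a minimal sketch using the le fitted above:
le.classes_                   # the original category values: 0.0 and 90.0
le.inverse_transform([0, 1])  # decode back to the original angles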
df
| | Соотношение матрица-наполнитель | Плотность, кг/м3 | модуль упругости, ГПа | Количество отвердителя, м.% | Содержание эпоксидных групп,%_2 | Температура вспышки, С_2 | Поверхностная плотность, г/м2 | Модуль упругости при растяжении, ГПа | Прочность при растяжении, МПа | Потребление смолы, г/м2 | Угол нашивки, град | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0.0 | 1.857143 | 2030.000000 | 738.736842 | 30.000000 | 22.267857 | 100.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0 | 4.000000 | 57.000000 |
| 1.0 | 1.857143 | 2030.000000 | 738.736842 | 50.000000 | 23.750000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0 | 4.000000 | 60.000000 |
| 2.0 | 1.857143 | 2030.000000 | 738.736842 | 49.900000 | 33.000000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0 | 4.000000 | 70.000000 |
| 3.0 | 1.857143 | 2030.000000 | 738.736842 | 129.000000 | 21.250000 | 300.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0 | 5.000000 | 47.000000 |
| 4.0 | 2.771331 | 2030.000000 | 753.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0 | 5.000000 | 57.000000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1018.0 | 2.271346 | 1952.087902 | 912.855545 | 86.992183 | 20.123249 | 324.774576 | 209.198700 | 73.090961 | 2387.292495 | 125.007669 | 1 | 9.076380 | 47.019770 |
| 1019.0 | 3.444022 | 2050.089171 | 444.732634 | 145.981978 | 19.599769 | 254.215401 | 350.660830 | 72.920827 | 2360.392784 | 117.730099 | 1 | 10.565614 | 53.750790 |
| 1020.0 | 3.280604 | 1972.372865 | 416.836524 | 110.533477 | 23.957502 | 248.423047 | 740.142791 | 74.734344 | 2662.906040 | 236.606764 | 1 | 4.161154 | 67.629684 |
| 1021.0 | 3.705351 | 2066.799773 | 741.475517 | 141.397963 | 19.246945 | 275.779840 | 641.468152 | 74.042708 | 2071.715856 | 197.126067 | 1 | 6.313201 | 58.261074 |
| 1022.0 | 3.808020 | 1890.413468 | 417.316232 | 129.183416 | 27.474763 | 300.952708 | 758.747882 | 74.309704 | 2856.328932 | 194.754342 | 1 | 6.078902 | 77.434468 |
1023 rows × 13 columns
df.info();
<class 'pandas.core.frame.DataFrame'>
Float64Index: 1023 entries, 0.0 to 1022.0
Data columns (total 13 columns):
 #   Column                                Non-Null Count  Dtype
---  ------                                --------------  -----
 0   Соотношение матрица-наполнитель       1023 non-null   float64
 1   Плотность, кг/м3                      1023 non-null   float64
 2   модуль упругости, ГПа                 1023 non-null   float64
 3   Количество отвердителя, м.%           1023 non-null   float64
 4   Содержание эпоксидных групп,%_2       1023 non-null   float64
 5   Температура вспышки, С_2              1023 non-null   float64
 6   Поверхностная плотность, г/м2         1023 non-null   float64
 7   Модуль упругости при растяжении, ГПа  1023 non-null   float64
 8   Прочность при растяжении, МПа         1023 non-null   float64
 9   Потребление смолы, г/м2               1023 non-null   float64
 10  Угол нашивки, град                    1023 non-null   int64
 11  Шаг нашивки                           1023 non-null   float64
 12  Плотность нашивки                     1023 non-null   float64
dtypes: float64(12), int64(1)
memory usage: 111.9 KB
df.describe()
| | Соотношение матрица-наполнитель | Плотность, кг/м3 | модуль упругости, ГПа | Количество отвердителя, м.% | Содержание эпоксидных групп,%_2 | Температура вспышки, С_2 | Поверхностная плотность, г/м2 | Модуль упругости при растяжении, ГПа | Прочность при растяжении, МПа | Потребление смолы, г/м2 | Угол нашивки, град | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 | 1023.000000 |
| mean | 2.930366 | 1975.734888 | 739.923233 | 110.570769 | 22.244390 | 285.882151 | 482.731833 | 73.328571 | 2466.922843 | 218.423144 | 0.491691 | 6.899222 | 57.153929 |
| std | 0.913222 | 73.729231 | 330.231581 | 28.295911 | 2.406301 | 40.943260 | 281.314690 | 3.118983 | 485.628006 | 59.735931 | 0.500175 | 2.563467 | 12.350969 |
| min | 0.389403 | 1731.764635 | 2.436909 | 17.740275 | 14.254985 | 100.000000 | 0.603740 | 64.054061 | 1036.856605 | 33.803026 | 0.000000 | 0.000000 | 0.000000 |
| 25% | 2.317887 | 1924.155467 | 500.047452 | 92.443497 | 20.608034 | 259.066528 | 266.816645 | 71.245018 | 2135.850448 | 179.627520 | 0.000000 | 5.080033 | 49.799212 |
| 50% | 2.906878 | 1977.621657 | 739.664328 | 110.564840 | 22.230744 | 285.896812 | 451.864365 | 73.268805 | 2459.524526 | 219.198882 | 0.000000 | 6.916144 | 57.341920 |
| 75% | 3.552660 | 2021.374375 | 961.812526 | 129.730366 | 23.961934 | 313.002106 | 693.225017 | 75.356612 | 2767.193119 | 257.481724 | 1.000000 | 8.586293 | 64.944961 |
| max | 5.591742 | 2207.773481 | 1911.536477 | 198.953207 | 33.000000 | 413.273418 | 1399.542362 | 82.682051 | 3848.436732 | 414.590628 | 1.000000 | 14.440522 | 103.988901 |
scatter_matrix(df, alpha=0.2, figsize=(40, 40), diagonal='kde', color='black');
Conclusion: the data show no clearly pronounced dependence; the regression relationship is weak and correlations are low. A complex non-linear dependence, or a dependence on a combination of features, is possible.
sns.set(rc = {'figure.figsize':(15, 8)})
# Visualize the correlation matrix
sns.heatmap(df.corr(), annot=True);
Conclusion: feature correlations are close to 0; the dependence is not linear.
df.hist(figsize=(20,15), color='black');
Conclusion: the "Угол нашивки" feature is discrete; the distributions of the remaining parameters are close to normal.
for col in df.columns:
    sns.catplot(x=col, data=df, kind='box', color='white', aspect=1*2)
    plt.title(col, fontsize=15)
Conclusion: the data contain outliers.
num_rows = len(df.index)
low_information_cols = []
for col in df.columns:
    cnts = df[col].value_counts(dropna=False)
    top_pct = (cnts / num_rows).iloc[0]
    # a feature is low-information if a single value covers more than 95% of rows
    if top_pct > 0.95:
        low_information_cols.append(col)
        print('{0}: {1:.5f}%'.format(col, top_pct * 100))
        print(cnts)
        print()
Conclusion: there are no low-information features.
# Replace IQR outliers with NaN, column by column
for x in df.columns:
    q75, q25 = np.percentile(df.loc[:, x], [75, 25])
    intr_qr = q75 - q25
    upper = q75 + 1.5 * intr_qr  # bounds renamed from max/min to avoid shadowing builtins
    lower = q25 - 1.5 * intr_qr
    df.loc[df[x] < lower, x] = np.nan
    df.loc[df[x] > upper, x] = np.nan
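For reference, the same IQR filtering can be written without an explicit loop; a minimal equivalent sketch in pandas (not the code that produced the counts below):
q25, q75 = df.quantile(0.25), df.quantile(0.75)
iqr = q75 - q25
# mask() puts NaN wherever the condition holds, column by column
df_masked = df.mask((df < q25 - 1.5 * iqr) | (df > q75 + 1.5 * iqr))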
df.isnull().sum()
Соотношение матрица-наполнитель          6
Плотность, кг/м3                         9
модуль упругости, ГПа                    2
Количество отвердителя, м.%             14
Содержание эпоксидных групп,%_2          2
Температура вспышки, С_2                 8
Поверхностная плотность, г/м2            2
Модуль упругости при растяжении, ГПа     6
Прочность при растяжении, МПа           11
Потребление смолы, г/м2                  8
Угол нашивки, град                       0
Шаг нашивки                              4
Плотность нашивки                       21
dtype: int64
df
| | Соотношение матрица-наполнитель | Плотность, кг/м3 | модуль упругости, ГПа | Количество отвердителя, м.% | Содержание эпоксидных групп,%_2 | Температура вспышки, С_2 | Поверхностная плотность, г/м2 | Модуль упругости при растяжении, ГПа | Прочность при растяжении, МПа | Потребление смолы, г/м2 | Угол нашивки, град | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0.0 | 1.857143 | 2030.000000 | 738.736842 | NaN | 22.267857 | NaN | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 57.000000 |
| 1.0 | 1.857143 | 2030.000000 | 738.736842 | 50.000000 | 23.750000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 60.000000 |
| 2.0 | 1.857143 | 2030.000000 | 738.736842 | 49.900000 | NaN | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 70.000000 |
| 3.0 | 1.857143 | 2030.000000 | 738.736842 | 129.000000 | 21.250000 | 300.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 47.000000 |
| 4.0 | 2.771331 | 2030.000000 | 753.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 57.000000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1018.0 | 2.271346 | 1952.087902 | 912.855545 | 86.992183 | 20.123249 | 324.774576 | 209.198700 | 73.090961 | 2387.292495 | 125.007669 | 1.0 | 9.076380 | 47.019770 |
| 1019.0 | 3.444022 | 2050.089171 | 444.732634 | 145.981978 | 19.599769 | 254.215401 | 350.660830 | 72.920827 | 2360.392784 | 117.730099 | 1.0 | 10.565614 | 53.750790 |
| 1020.0 | 3.280604 | 1972.372865 | 416.836524 | 110.533477 | 23.957502 | 248.423047 | 740.142791 | 74.734344 | 2662.906040 | 236.606764 | 1.0 | 4.161154 | 67.629684 |
| 1021.0 | 3.705351 | 2066.799773 | 741.475517 | 141.397963 | 19.246945 | 275.779840 | 641.468152 | 74.042708 | 2071.715856 | 197.126067 | 1.0 | 6.313201 | 58.261074 |
| 1022.0 | 3.808020 | 1890.413468 | 417.316232 | 129.183416 | 27.474763 | 300.952708 | 758.747882 | 74.309704 | 2856.328932 | 194.754342 | 1.0 | 6.078902 | 77.434468 |
1023 rows × 13 columns
df_clean = df.dropna(axis=0)
df_clean
| | Соотношение матрица-наполнитель | Плотность, кг/м3 | модуль упругости, ГПа | Количество отвердителя, м.% | Содержание эпоксидных групп,%_2 | Температура вспышки, С_2 | Поверхностная плотность, г/м2 | Модуль упругости при растяжении, ГПа | Прочность при растяжении, МПа | Потребление смолы, г/м2 | Угол нашивки, град | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 1.0 | 1.857143 | 2030.000000 | 738.736842 | 50.000000 | 23.750000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 60.000000 |
| 3.0 | 1.857143 | 2030.000000 | 738.736842 | 129.000000 | 21.250000 | 300.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 47.000000 |
| 4.0 | 2.771331 | 2030.000000 | 753.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 57.000000 |
| 5.0 | 2.767918 | 2000.000000 | 748.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 60.000000 |
| 6.0 | 2.569620 | 1910.000000 | 807.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 70.000000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1018.0 | 2.271346 | 1952.087902 | 912.855545 | 86.992183 | 20.123249 | 324.774576 | 209.198700 | 73.090961 | 2387.292495 | 125.007669 | 1.0 | 9.076380 | 47.019770 |
| 1019.0 | 3.444022 | 2050.089171 | 444.732634 | 145.981978 | 19.599769 | 254.215401 | 350.660830 | 72.920827 | 2360.392784 | 117.730099 | 1.0 | 10.565614 | 53.750790 |
| 1020.0 | 3.280604 | 1972.372865 | 416.836524 | 110.533477 | 23.957502 | 248.423047 | 740.142791 | 74.734344 | 2662.906040 | 236.606764 | 1.0 | 4.161154 | 67.629684 |
| 1021.0 | 3.705351 | 2066.799773 | 741.475517 | 141.397963 | 19.246945 | 275.779840 | 641.468152 | 74.042708 | 2071.715856 | 197.126067 | 1.0 | 6.313201 | 58.261074 |
| 1022.0 | 3.808020 | 1890.413468 | 417.316232 | 129.183416 | 27.474763 | 300.952708 | 758.747882 | 74.309704 | 2856.328932 | 194.754342 | 1.0 | 6.078902 | 77.434468 |
936 rows × 13 columns
We will also try replacing the outliers with the median values:
df_median = df.fillna(df.median())
df_median
| | Соотношение матрица-наполнитель | Плотность, кг/м3 | модуль упругости, ГПа | Количество отвердителя, м.% | Содержание эпоксидных групп,%_2 | Температура вспышки, С_2 | Поверхностная плотность, г/м2 | Модуль упругости при растяжении, ГПа | Прочность при растяжении, МПа | Потребление смолы, г/м2 | Угол нашивки, град | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0.0 | 1.857143 | 2030.000000 | 738.736842 | 110.564840 | 22.267857 | 285.896812 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 57.000000 |
| 1.0 | 1.857143 | 2030.000000 | 738.736842 | 50.000000 | 23.750000 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 60.000000 |
| 2.0 | 1.857143 | 2030.000000 | 738.736842 | 49.900000 | 22.230744 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 4.000000 | 70.000000 |
| 3.0 | 1.857143 | 2030.000000 | 738.736842 | 129.000000 | 21.250000 | 300.000000 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 47.000000 |
| 4.0 | 2.771331 | 2030.000000 | 753.000000 | 111.860000 | 22.267857 | 284.615385 | 210.000000 | 70.000000 | 3000.000000 | 220.000000 | 0.0 | 5.000000 | 57.000000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1018.0 | 2.271346 | 1952.087902 | 912.855545 | 86.992183 | 20.123249 | 324.774576 | 209.198700 | 73.090961 | 2387.292495 | 125.007669 | 1.0 | 9.076380 | 47.019770 |
| 1019.0 | 3.444022 | 2050.089171 | 444.732634 | 145.981978 | 19.599769 | 254.215401 | 350.660830 | 72.920827 | 2360.392784 | 117.730099 | 1.0 | 10.565614 | 53.750790 |
| 1020.0 | 3.280604 | 1972.372865 | 416.836524 | 110.533477 | 23.957502 | 248.423047 | 740.142791 | 74.734344 | 2662.906040 | 236.606764 | 1.0 | 4.161154 | 67.629684 |
| 1021.0 | 3.705351 | 2066.799773 | 741.475517 | 141.397963 | 19.246945 | 275.779840 | 641.468152 | 74.042708 | 2071.715856 | 197.126067 | 1.0 | 6.313201 | 58.261074 |
| 1022.0 | 3.808020 | 1890.413468 | 417.316232 | 129.183416 | 27.474763 | 300.952708 | 758.747882 | 74.309704 | 2856.328932 | 194.754342 | 1.0 | 6.078902 | 77.434468 |
1023 rows × 13 columns
Normalized dataset with the outliers removed:
scaler = MinMaxScaler()
feature_names = ['Соотношение матрица-наполнитель', 'Плотность', 'Модуль упругости', 'Количество отвердителя',
                 'Содержание эпоксидных групп', 'Температура вспышки', 'Поверхностная плотность',
                 'Модуль упругости при растяжении', 'Прочность при растяжении', 'Потребление смолы',
                 'Угол нашивки', 'Шаг нашивки', 'Плотность нашивки']
df_norm = pd.DataFrame(scaler.fit_transform(df_clean), columns=feature_names)
df_norm.info();
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 936 entries, 0 to 935
Data columns (total 13 columns):
 #   Column                           Non-Null Count  Dtype
---  ------                           --------------  -----
 0   Соотношение матрица-наполнитель  936 non-null    float64
 1   Плотность                        936 non-null    float64
 2   Модуль упругости                 936 non-null    float64
 3   Количество отвердителя           936 non-null    float64
 4   Содержание эпоксидных групп      936 non-null    float64
 5   Температура вспышки              936 non-null    float64
 6   Поверхностная плотность          936 non-null    float64
 7   Модуль упругости при растяжении  936 non-null    float64
 8   Прочность при растяжении         936 non-null    float64
 9   Потребление смолы                936 non-null    float64
 10  Угол нашивки                     936 non-null    float64
 11  Шаг нашивки                      936 non-null    float64
 12  Плотность нашивки                936 non-null    float64
dtypes: float64(13)
memory usage: 95.2 KB
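Because the scaler was fitted on df_clean, normalized values can be mapped back to physical units when needed; a minimal sketch:
df_restored = pd.DataFrame(scaler.inverse_transform(df_norm), columns=df_norm.columns)  # original units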
Normalized dataset with median values in place of the outliers:
df_norm_median = pd.DataFrame(scaler.fit_transform(df_median), columns=feature_names)
df_norm_median
| | Соотношение матрица-наполнитель | Плотность | Модуль упругости | Количество отвердителя | Содержание эпоксидных групп | Температура вспышки | Поверхностная плотность | Модуль упругости при растяжении | Прочность при растяжении | Потребление смолы | Угол нашивки | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.274768 | 0.651097 | 0.447061 | 0.502210 | 0.495653 | 0.515364 | 0.162230 | 0.280303 | 0.712590 | 0.529221 | 0.0 | 0.291282 | 0.506083 |
| 1 | 0.274768 | 0.651097 | 0.447061 | 0.079153 | 0.607435 | 0.509164 | 0.162230 | 0.280303 | 0.712590 | 0.529221 | 0.0 | 0.291282 | 0.557156 |
| 2 | 0.274768 | 0.651097 | 0.447061 | 0.078454 | 0.492854 | 0.509164 | 0.162230 | 0.280303 | 0.712590 | 0.529221 | 0.0 | 0.291282 | 0.727399 |
| 3 | 0.274768 | 0.651097 | 0.447061 | 0.630983 | 0.418887 | 0.583596 | 0.162230 | 0.280303 | 0.712590 | 0.529221 | 0.0 | 0.364102 | 0.335840 |
| 4 | 0.466552 | 0.651097 | 0.455721 | 0.511257 | 0.495653 | 0.509164 | 0.162230 | 0.280303 | 0.712590 | 0.529221 | 0.0 | 0.364102 | 0.506083 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1018 | 0.361662 | 0.444480 | 0.552781 | 0.337550 | 0.333908 | 0.703458 | 0.161609 | 0.475147 | 0.463043 | 0.207613 | 1.0 | 0.660946 | 0.336177 |
| 1019 | 0.607674 | 0.704373 | 0.268550 | 0.749605 | 0.294428 | 0.362087 | 0.271207 | 0.464422 | 0.452087 | 0.182974 | 1.0 | 0.769393 | 0.450768 |
| 1020 | 0.573391 | 0.498274 | 0.251612 | 0.501991 | 0.623085 | 0.334063 | 0.572959 | 0.578740 | 0.575296 | 0.585446 | 1.0 | 0.303017 | 0.687046 |
| 1021 | 0.662497 | 0.748688 | 0.448724 | 0.717585 | 0.267818 | 0.466417 | 0.496511 | 0.535142 | 0.334513 | 0.451779 | 1.0 | 0.459730 | 0.527552 |
| 1022 | 0.684036 | 0.280923 | 0.251903 | 0.632264 | 0.888354 | 0.588206 | 0.587373 | 0.551972 | 0.654075 | 0.443749 | 1.0 | 0.442668 | 0.853966 |
1023 rows × 13 columns
for col in df_norm.columns:
    sns.catplot(x=col, data=df_norm, kind='box', color='white', aspect=1*2)
    plt.title(col, fontsize=15)
df_norm.hist(figsize=(20,15), color='black');
# Target 1: 'Модуль упругости при растяжении' (elastic modulus in tension)
X_1 = df_norm.drop(['Модуль упругости при растяжении'], axis=1)
Y_1 = df_norm['Модуль упругости при растяжении']
X_1.head()
| | Соотношение матрица-наполнитель | Плотность | Модуль упругости | Количество отвердителя | Содержание эпоксидных групп | Температура вспышки | Поверхностная плотность | Прочность при растяжении | Потребление смолы | Угол нашивки | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.274768 | 0.651097 | 0.447061 | 0.079153 | 0.607435 | 0.509164 | 0.16223 | 0.71259 | 0.529221 | 0.0 | 0.289334 | 0.557156 |
| 1 | 0.274768 | 0.651097 | 0.447061 | 0.630983 | 0.418887 | 0.583596 | 0.16223 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.335840 |
| 2 | 0.466552 | 0.651097 | 0.455721 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.506083 |
| 3 | 0.465836 | 0.571539 | 0.452685 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.557156 |
| 4 | 0.424236 | 0.332865 | 0.488508 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.727399 |
# Target 2: 'Прочность при растяжении' (tensile strength)
X_2 = df_norm.drop(['Прочность при растяжении'], axis=1)
Y_2 = df_norm['Прочность при растяжении']
X_2.head()
| | Соотношение матрица-наполнитель | Плотность | Модуль упругости | Количество отвердителя | Содержание эпоксидных групп | Температура вспышки | Поверхностная плотность | Модуль упругости при растяжении | Потребление смолы | Угол нашивки | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.274768 | 0.651097 | 0.447061 | 0.079153 | 0.607435 | 0.509164 | 0.16223 | 0.280303 | 0.529221 | 0.0 | 0.289334 | 0.557156 |
| 1 | 0.274768 | 0.651097 | 0.447061 | 0.630983 | 0.418887 | 0.583596 | 0.16223 | 0.280303 | 0.529221 | 0.0 | 0.362355 | 0.335840 |
| 2 | 0.466552 | 0.651097 | 0.455721 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.280303 | 0.529221 | 0.0 | 0.362355 | 0.506083 |
| 3 | 0.465836 | 0.571539 | 0.452685 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.280303 | 0.529221 | 0.0 | 0.362355 | 0.557156 |
| 4 | 0.424236 | 0.332865 | 0.488508 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.280303 | 0.529221 | 0.0 | 0.362355 | 0.727399 |
# Target 3: 'Соотношение матрица-наполнитель' (matrix-to-filler ratio), in both outlier treatments
X_3 = df_norm.drop(['Соотношение матрица-наполнитель'], axis=1)
X_3_median = df_norm_median.drop(['Соотношение матрица-наполнитель'], axis=1)
Y_3 = df_norm['Соотношение матрица-наполнитель']
Y_3_median = df_norm_median['Соотношение матрица-наполнитель']
X_3.head()
| | Плотность | Модуль упругости | Количество отвердителя | Содержание эпоксидных групп | Температура вспышки | Поверхностная плотность | Модуль упругости при растяжении | Прочность при растяжении | Потребление смолы | Угол нашивки | Шаг нашивки | Плотность нашивки |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.651097 | 0.447061 | 0.079153 | 0.607435 | 0.509164 | 0.16223 | 0.280303 | 0.71259 | 0.529221 | 0.0 | 0.289334 | 0.557156 |
| 1 | 0.651097 | 0.447061 | 0.630983 | 0.418887 | 0.583596 | 0.16223 | 0.280303 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.335840 |
| 2 | 0.651097 | 0.455721 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.280303 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.506083 |
| 3 | 0.571539 | 0.452685 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.280303 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.557156 |
| 4 | 0.332865 | 0.488508 | 0.511257 | 0.495653 | 0.509164 | 0.16223 | 0.280303 | 0.71259 | 0.529221 | 0.0 | 0.362355 | 0.727399 |
X_1_train, X_1_test, Y_1_train, Y_1_test = train_test_split(X_1, Y_1, test_size = 0.3, random_state=42)
X_2_train, X_2_test, Y_2_train, Y_2_test = train_test_split(X_2, Y_2, test_size = 0.3, random_state=42)
X_3_train, X_3_test, Y_3_train, Y_3_test = train_test_split(X_3, Y_3, test_size = 0.3, random_state=42)
X_3_train_median, X_3_test_median, Y_3_train_median, Y_3_test_median = train_test_split(X_3_median, Y_3_median, test_size = 0.3, random_state=42)
def mean_model(test_model):
    # naive baseline: predict the mean of the passed target values for every sample
    return [np.mean(test_model) for _ in range(len(test_model))]
Y_1_predict_mean = mean_model(Y_1_test)
Y_2_predict_mean = mean_model(Y_2_test)
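The same kind of baseline is available in scikit-learn as DummyRegressor; a minimal sketch of the equivalent (note it predicts the training-set mean, while mean_model above averages the test target):
from sklearn.dummy import DummyRegressor
dummy = DummyRegressor(strategy='mean')  # always predicts the mean of the training target
dummy.fit(X_1_train, Y_1_train)
Y_1_dummy = dummy.predict(X_1_test)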
Linear Regressor
slr = LinearRegression()
slr.fit(X_1_train, Y_1_train)
Y_1_train_predict = slr.predict(X_1_train)
Y_1_test_predict = slr.predict(X_1_test)
df_lin = pd.DataFrame({'Actual': Y_1_test, 'Predicted': Y_1_test_predict})
df_lin.head()
| | Actual | Predicted |
|---|---|---|
| 321 | 0.773607 | 0.476773 |
| 70 | 0.228714 | 0.499995 |
| 209 | 0.364619 | 0.517926 |
| 656 | 0.775536 | 0.503051 |
| 685 | 0.303287 | 0.490986 |
round(mean_squared_error(Y_1_test, Y_1_test_predict), 6)
0.034808
round(r2_score(Y_1_test, Y_1_test_predict), 6)
-0.005222
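An R² below zero means the model fits the test data worse than simply predicting the mean of the target, i.e. the linear model has essentially no predictive power here.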
plt.axes(aspect='equal')
plt.scatter(Y_1_test, Y_1_test_predict, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
# distribution of the prediction error
error = Y_1_test - Y_1_test_predict
plt.hist(error, bins = 25, color='black')
plt.xlabel('Prediction error')
plt.ylabel('Count');
Comparison with a model that simply predicts the mean of the target variable over the test set:
round(mean_squared_error(Y_1_test, Y_1_predict_mean), 6)
0.034627
round(r2_score(Y_1_test, Y_1_predict_mean), 6)
0.0
plt.axes(aspect='equal')
plt.scatter(Y_1_test, Y_1_predict_mean, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
Decision Tree Regressor
dtr = DecisionTreeRegressor(random_state=1)
parameters_dtr = {'max_depth': range(1, 7),
                  'min_samples_leaf': range(1, 8),
                  'min_samples_split': range(2, 10, 2)}
grid_dtr = GridSearchCV(dtr, parameters_dtr, cv=10)
grid_dtr.fit(X_1_train, Y_1_train)
grid_dtr.best_params_
{'max_depth': 1, 'min_samples_leaf': 6, 'min_samples_split': 2}
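As a shortcut, GridSearchCV (with the default refit=True) already holds the best model refitted on the whole training set, so explicit re-instantiation is optional; a minimal equivalent sketch:
best_tree = grid_dtr.best_estimator_  # equivalent to the explicit re-instantiation below
Y_predict_tree = best_tree.predict(X_1_test)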
decision_tree_model = DecisionTreeRegressor(max_depth = 1, min_samples_leaf = 6, min_samples_split = 2, random_state=1)
decision_tree_model.fit(X_1_train, Y_1_train)
Y_predict_tree = decision_tree_model.predict(X_1_test)
round(mean_squared_error(Y_1_test, Y_predict_tree), 6)
0.034693
round(r2_score(Y_1_test, Y_predict_tree), 6)
-0.001902
plt.axes(aspect='equal')
plt.scatter(Y_1_test, Y_predict_tree, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
Random Forest Regressor
rfr = RandomForestRegressor()
parameters_rfr = {'n_estimators': range(10, 51, 5),
                  'max_depth': range(1, 7),
                  'min_samples_leaf': range(1, 8)}
grid_rfr = GridSearchCV(rfr, parameters_rfr, cv=10)
grid_rfr.fit(X_1_train, Y_1_train)
grid_rfr.best_params_
{'max_depth': 1, 'min_samples_leaf': 2, 'n_estimators': 10}
rand_forest_model = RandomForestRegressor(n_estimators = 15, max_depth = 1, min_samples_leaf = 7, min_samples_split=4)
rand_forest_model.fit(X_1_train, Y_1_train)
Y_predict_forest = rand_forest_model.predict(X_1_test)
round(mean_squared_error(Y_1_test, Y_predict_forest), 6)
0.034793
round(r2_score(Y_1_test, Y_predict_forest), 6)
-0.004805
plt.axes(aspect='equal')
plt.scatter(Y_1_test, Y_predict_forest, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
Gradient Boosting Regressor
gbr = GradientBoostingRegressor()
parameters_gbr = {'n_estimators': range(30, 51, 10),
                  'max_depth': range(1, 2),
                  'min_samples_leaf': range(1, 5)}
grid_gbr = GridSearchCV(gbr, parameters_gbr, cv=10)
grid_gbr.fit(X_1_train, Y_1_train)
grid_gbr.best_params_
{'max_depth': 1, 'min_samples_leaf': 1, 'n_estimators': 30}
gradient_boosting_model = GradientBoostingRegressor(n_estimators = 50, max_depth = 1, min_samples_leaf = 3)
gradient_boosting_model.fit(X_1_train, Y_1_train)
Y_predict_boosting = gradient_boosting_model.predict(X_1_test)
round(mean_squared_error(Y_1_test, Y_predict_boosting), 6)
0.034329
round(r2_score(Y_1_test, Y_predict_boosting), 6)
0.00862
plt.axes(aspect='equal')
plt.scatter(Y_1_test, Y_predict_boosting, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
MSE comparison:
mse_compare = pd.DataFrame({'Model': ['Linear Regressor', 'Decision Tree Regressor', 'Random Forest Regressor',
                                      'Gradient Boosting Regressor', 'Mean Function'],
                            'MSE': [0.034808, 0.034693, 0.034701, 0.034329, 0.034627]}).sort_values('MSE')
mse_compare
| | Model | MSE |
|---|---|---|
| 3 | Gradient Boosting Regressor | 0.034329 |
| 4 | Mean Function | 0.034627 |
| 1 | Decision Tree Regressor | 0.034693 |
| 2 | Random Forest Regressor | 0.034701 |
| 0 | Linear Regressor | 0.034808 |
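Conclusion: for the tensile elastic modulus, only gradient boosting edges out the mean baseline, and only marginally, which is consistent with the weak correlations observed earlier.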
Linear Regressor
slr = LinearRegression()
slr.fit(X_2_train, Y_2_train)
Y_2_train_predict = slr.predict(X_2_train)
Y_2_test_predict = slr.predict(X_2_test)
df_lin = pd.DataFrame({'Actual': Y_2_test, 'Predicted': Y_2_test_predict})
df_lin.head()
| | Actual | Predicted |
|---|---|---|
| 321 | 0.233319 | 0.435826 |
| 70 | 0.613672 | 0.491103 |
| 209 | 0.635615 | 0.516908 |
| 656 | 0.551901 | 0.515739 |
| 685 | 0.321437 | 0.481831 |
round(mean_squared_error(Y_2_test, Y_2_test_predict), 6)
0.035449
round(r2_score(Y_2_test, Y_2_test_predict), 6)
-0.046999
plt.axes(aspect='equal')
plt.scatter(Y_2_test, Y_2_test_predict, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
# distribution of the prediction error
error = Y_2_test - Y_2_test_predict
plt.hist(error, bins = 25, color='black')
plt.xlabel('Prediction error')
plt.ylabel('Count');
Comparison with a model that simply predicts the mean of the target variable over the test set:
round(mean_squared_error(Y_2_test, Y_2_predict_mean), 6)
0.033857
round(r2_score(Y_2_test, Y_2_predict_mean), 6)
-0.0
plt.axes(aspect='equal')
plt.scatter(Y_2_test, Y_2_predict_mean, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
Decision Tree Regressor
dtr = DecisionTreeRegressor(random_state=1)
parameters_dtr = {'max_depth': range(1, 7),
                  'min_samples_leaf': range(1, 8),
                  'min_samples_split': range(2, 10, 2)}
grid_dtr = GridSearchCV(dtr, parameters_dtr, cv=10)
grid_dtr.fit(X_2_train, Y_2_train)
grid_dtr.best_params_
{'max_depth': 1, 'min_samples_leaf': 1, 'min_samples_split': 2}
decision_tree_model = DecisionTreeRegressor(max_depth = 1, min_samples_leaf = 1, min_samples_split = 2, random_state=1)
decision_tree_model.fit(X_2_train, Y_2_train)
Y_predict_tree = decision_tree_model.predict(X_2_test)
round(mean_squared_error(Y_2_test, Y_predict_tree), 6)
0.035352
round(r2_score(Y_2_test, Y_predict_tree), 6)
-0.044133
plt.axes(aspect='equal')
plt.scatter(Y_2_test, Y_predict_tree, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
Random Forest Regressor
rfr = RandomForestRegressor()
parameters_rfr = {'n_estimators': range(10, 31, 10),
                  'max_depth': range(1, 7),
                  'min_samples_leaf': range(1, 8)}
grid_rfr = GridSearchCV(rfr, parameters_rfr, cv=10)
grid_rfr.fit(X_2_train, Y_2_train)
grid_rfr.best_params_
{'max_depth': 1, 'min_samples_leaf': 5, 'n_estimators': 10}
rand_forest_model = RandomForestRegressor(n_estimators = 10, max_depth = 1, min_samples_leaf = 1)
rand_forest_model.fit(X_2_train, Y_2_train)
Y_predict_forest = rand_forest_model.predict(X_2_test)
round(mean_squared_error(Y_2_test, Y_predict_forest), 6)
0.035063
round(r2_score(Y_2_test, Y_predict_forest), 6)
-0.035611
plt.axes(aspect='equal')
plt.scatter(Y_2_test, Y_predict_forest, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
Gradient Boosting Regressor
gbr = GradientBoostingRegressor()
parameters_gbr = {'n_estimators': range(30, 51, 10),
                  'max_depth': range(1, 2),
                  'min_samples_leaf': range(1, 5)}
grid_gbr = GridSearchCV(gbr, parameters_gbr, cv=10)
grid_gbr.fit(X_2_train, Y_2_train)
grid_gbr.best_params_
{'max_depth': 1, 'min_samples_leaf': 3, 'n_estimators': 30}
gradient_boosting_model = GradientBoostingRegressor(n_estimators = 30, max_depth = 1, min_samples_leaf = 3)
gradient_boosting_model.fit(X_2_train, Y_2_train)
Y_predict_boosting = gradient_boosting_model.predict(X_2_test)
round(mean_squared_error(Y_2_test, Y_predict_boosting), 6)
0.034368
round(r2_score(Y_2_test, Y_predict_boosting), 6)
-0.033789
plt.axes(aspect='equal')
plt.scatter(Y_2_test, Y_predict_boosting, color='black')
plt.xlabel('Actual')
plt.ylabel('Predicted')
plt.plot([0, 1],[0, 1]);
MSE comparison:
mse_compare = pd.DataFrame({'Model': ['Linear Regressor', 'Decision Tree Regressor', 'Random Forest Regressor',
                                      'Gradient Boosting Regressor', 'Mean Function'],
                            'MSE': [0.035449, 0.035352, 0.035164, 0.034368, 0.033857]}).sort_values('MSE')
mse_compare
| | Model | MSE |
|---|---|---|
| 4 | Mean Function | 0.033857 |
| 3 | Gradient Boosting Regressor | 0.034368 |
| 2 | Random Forest Regressor | 0.035164 |
| 1 | Decision Tree Regressor | 0.035352 |
| 0 | Linear Regressor | 0.035449 |
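Conclusion: for tensile strength none of the models beats the mean baseline, again indicating that the features carry very little signal for this target.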
n_model = keras.Sequential([
    layers.Dense(12, activation='relu', input_dim=12),
    layers.Dense(128, activation='relu'),
    layers.Dense(64, activation='relu'),
    layers.Dense(64, activation='relu'),
    layers.Dense(1),
])
# increasing the number of layers or neurons beyond these values showed practically no change in the result
n_model.compile(optimizer='SGD',
loss='mse',
metrics='mse')
# the Adam and RMSProp optimizers gave worse results and tended to overfit
# (e.g., with Adam the test-set MSE started growing from around epoch 15)
n_model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense (Dense) (None, 12) 156
dense_1 (Dense) (None, 128) 1664
dense_2 (Dense) (None, 64) 8256
dense_3 (Dense) (None, 64) 4160
dense_4 (Dense) (None, 1) 65
=================================================================
Total params: 14,301
Trainable params: 14,301
Non-trainable params: 0
_________________________________________________________________
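The parameter counts can be checked by hand: a Dense layer with n inputs and m units has (n + 1) * m parameters, biases included. A quick verification:
# (inputs + 1) * units for every Dense layer, bias weights included
(12 + 1) * 12     # dense:   156
(12 + 1) * 128    # dense_1: 1664
(128 + 1) * 64    # dense_2: 8256
(64 + 1) * 64     # dense_3: 4160
(64 + 1) * 1      # dense_4: 65
# total: 156 + 1664 + 8256 + 4160 + 65 = 14301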
history = n_model.fit(X_3_train, Y_3_train, epochs=100, validation_split = 0.3)
# the results remain stable even over 500 epochs
Epoch 1/100
15/15 [==============================] - 1s 14ms/step - loss: 0.1779 - mse: 0.1779 - val_loss: 0.0551 - val_mse: 0.0551
Epoch 2/100
15/15 [==============================] - 0s 4ms/step - loss: 0.0463 - mse: 0.0463 - val_loss: 0.0379 - val_mse: 0.0379
Epoch 3/100
15/15 [==============================] - 0s 4ms/step - loss: 0.0384 - mse: 0.0384 - val_loss: 0.0376 - val_mse: 0.0376
...
Epoch 99/100
15/15 [==============================] - 0s 3ms/step - loss: 0.0350 - mse: 0.0350 - val_loss: 0.0359 - val_mse: 0.0359
Epoch 100/100
15/15 [==============================] - 0s 3ms/step - loss: 0.0350 - mse: 0.0350 - val_loss: 0.0360 - val_mse: 0.0360
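Since the validation loss plateaus early, the epoch count could also be chosen automatically; a minimal sketch with Keras' EarlyStopping callback (an addition, not used in the run above):
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
# stops training once val_loss has not improved for 10 epochs and restores the best weights
history = n_model.fit(X_3_train, Y_3_train, epochs=500, validation_split=0.3, callbacks=[early_stop])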
n_model.evaluate(X_3_test, Y_3_test, verbose=1)
9/9 [==============================] - 0s 1ms/step - loss: 0.0338 - mse: 0.0338
[0.03383169323205948, 0.03383169323205948]
n_model.save("neural_net")
INFO:tensorflow:Assets written to: neural_net\assets
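The saved model can later be restored without retraining; a minimal sketch:
restored_model = keras.models.load_model("neural_net")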
predict_model = n_model.predict(X_3_test)
predict_values = np.ravel(predict_model)  # flatten the (n, 1) output into a 1-D array
predict_values
array([0.508, 0.509, 0.523, 0.504, 0.469, 0.506, 0.485, 0.519, 0.491,
0.566, 0.508, 0.579, 0.54 , 0.498, 0.487, 0.503, 0.485, 0.488,
0.512, 0.506, 0.488, 0.513, 0.501, 0.505, 0.548, 0.531, 0.498,
0.487, 0.509, 0.523, 0.495, 0.502, 0.467, 0.496, 0.498, 0.479,
0.497, 0.514, 0.501, 0.486, 0.499, 0.607, 0.499, 0.48 , 0.524,
0.508, 0.504, 0.577, 0.504, 0.499, 0.528, 0.532, 0.513, 0.491,
0.5 , 0.476, 0.529, 0.538, 0.496, 0.523, 0.478, 0.49 , 0.498,
0.481, 0.501, 0.505, 0.482, 0.489, 0.533, 0.503, 0.505, 0.516,
0.544, 0.516, 0.509, 0.534, 0.51 , 0.55 , 0.533, 0.515, 0.57 ,
0.472, 0.488, 0.486, 0.539, 0.556, 0.5 , 0.503, 0.517, 0.494,
0.547, 0.576, 0.494, 0.537, 0.54 , 0.528, 0.552, 0.512, 0.484,
0.537, 0.511, 0.513, 0.497, 0.505, 0.508, 0.509, 0.495, 0.497,
0.473, 0.502, 0.498, 0.563, 0.555, 0.584, 0.516, 0.529, 0.513,
0.512, 0.497, 0.483, 0.492, 0.509, 0.498, 0.51 , 0.477, 0.484,
0.497, 0.549, 0.532, 0.481, 0.492, 0.553, 0.517, 0.508, 0.497,
0.497, 0.483, 0.527, 0.536, 0.481, 0.487, 0.493, 0.5 , 0.518,
0.523, 0.477, 0.511, 0.502, 0.498, 0.457, 0.506, 0.511, 0.507,
0.479, 0.528, 0.505, 0.552, 0.496, 0.549, 0.5 , 0.492, 0.498,
0.501, 0.466, 0.512, 0.532, 0.501, 0.539, 0.472, 0.52 , 0.512,
0.478, 0.514, 0.501, 0.496, 0.481, 0.515, 0.485, 0.581, 0.522,
0.493, 0.556, 0.513, 0.519, 0.532, 0.587, 0.525, 0.553, 0.486,
0.548, 0.559, 0.502, 0.588, 0.487, 0.558, 0.5 , 0.498, 0.49 ,
0.497, 0.499, 0.516, 0.477, 0.485, 0.529, 0.528, 0.54 , 0.533,
0.5 , 0.541, 0.511, 0.469, 0.528, 0.49 , 0.504, 0.504, 0.535,
0.519, 0.575, 0.555, 0.491, 0.585, 0.498, 0.505, 0.513, 0.495,
0.489, 0.465, 0.477, 0.507, 0.526, 0.508, 0.524, 0.542, 0.488,
0.493, 0.514, 0.506, 0.504, 0.554, 0.488, 0.519, 0.518, 0.539,
0.509, 0.518, 0.477, 0.495, 0.531, 0.517, 0.518, 0.524, 0.517,
0.5 , 0.537, 0.513, 0.519, 0.496, 0.547, 0.518, 0.493, 0.497,
0.48 , 0.532, 0.513, 0.497, 0.492, 0.508, 0.571, 0.574, 0.523,
0.502, 0.534, 0.503, 0.516, 0.532, 0.48 , 0.47 , 0.482, 0.535,
0.512, 0.473], dtype=float32)
Y_3_test_values = np.ravel(Y_3_test.copy())
Y_3_test_values
array([0.488, 0.292, 0.246, 0.851, 0.611, 0.448, 0.205, 0.63 , 0.425,
0.384, 0.315, 0.399, 0.6 , 0.411, 0.616, 0.535, 0.39 , 0.34 ,
0.209, 0.471, 0.346, 0.742, 0.662, 0.511, 0.419, 0.865, 0.27 ,
0.578, 0.789, 0.532, 0.433, 0.418, 0.136, 0.313, 0.51 , 0.619,
0.705, 0.199, 0.672, 0.715, 0.448, 0.417, 0.519, 0.245, 0.336,
0.436, 0.606, 0.522, 0.801, 0.443, 0.712, 0.449, 0.59 , 0.405,
0.275, 0.58 , 0.996, 0.393, 0.595, 0.817, 0.088, 0.524, 0.825,
0.588, 0.198, 0.497, 0.636, 0.252, 0.095, 0.728, 0.404, 0.651,
0.554, 0.492, 0.479, 0.815, 0.52 , 0.333, 0.477, 0.306, 0.428,
0.319, 0.671, 0.462, 0.292, 0.491, 0.209, 0.36 , 0.773, 0.453,
0.321, 0.589, 0.451, 0.336, 0.835, 0.243, 0.267, 0.263, 0.551,
0.811, 0.594, 0.281, 0.305, 0.51 , 0.442, 0.804, 0.513, 0.464,
0.737, 0.663, 0.556, 0.631, 0.513, 0.48 , 0.674, 0.34 , 0.346,
0.448, 0.375, 0.515, 0.362, 0.606, 0.75 , 0.597, 0.376, 0.268,
0.444, 0.347, 0.629, 0.544, 0.537, 0.331, 0.394, 0.495, 0.701,
0.572, 0.308, 0.432, 0.796, 0.494, 0.859, 0.13 , 0.422, 0.349,
0.467, 0.589, 0.704, 0.667, 0.479, 0.589, 0.59 , 0.463, 0.104,
0.268, 0.597, 0.159, 0.327, 0.576, 0.647, 0.512, 0.538, 0.652,
0.637, 0.45 , 0.136, 0.464, 0.749, 0.498, 0.394, 0.497, 0.672,
0.279, 0.366, 0.525, 0.38 , 0.35 , 0.505, 0.386, 0.495, 0.582,
0.135, 0.327, 0.25 , 0.735, 0.467, 0.41 , 0.473, 0.691, 0.423,
0.539, 0.344, 0.65 , 0.458, 0.51 , 0.957, 0.63 , 0.543, 0.723,
0.644, 0.339, 0.146, 0.454, 0.335, 0.22 , 0.426, 0.55 , 0.469,
0.319, 0.246, 0.464, 0.487, 0.849, 0.573, 0.447, 0.472, 0.881,
0.291, 0.494, 0.893, 0.491, 0.415, 0.57 , 0.398, 0.381, 0.069,
0.526, 0.213, 0.499, 0.582, 0.486, 0.555, 0.549, 0.409, 0.78 ,
0.839, 0.404, 0.621, 0.221, 0.273, 0.489, 0.394, 0.423, 0.572,
0.715, 0.872, 0.059, 0.777, 0.208, 0.626, 0.641, 0.362, 0.54 ,
0.68 , 0.644, 0.316, 0.397, 0.565, 0.554, 0.51 , 0.781, 0.617,
0.478, 0.42 , 0.685, 0.774, 0.15 , 0.462, 0.7 , 0.74 , 0.508,
0.39 , 0.414, 0.722, 0.239, 0.82 , 0.381, 0.528, 0.441, 0.835,
0.535, 0.569])
Scatter plot of actual vs. predicted values. Ideally the points should lie along a straight line.
feature_results = pd.DataFrame({'Данные': Y_3_test_values,'Прогноз': predict_values})
sns.set_style('darkgrid')
plt.title('Рассеяние тестовых и прогнозных значений',size=16)
plt.xlabel('Данные',size=12)
plt.ylabel('Прогноз',size=12)
sns.scatterplot(x='Данные', y='Прогноз', data=feature_results, color="black")
plt.show()
feature_results
| | Данные | Прогноз |
|---|---|---|
| 0 | 0.487579 | 0.508204 |
| 1 | 0.292155 | 0.509448 |
| 2 | 0.245958 | 0.522566 |
| 3 | 0.850789 | 0.504228 |
| 4 | 0.611147 | 0.469178 |
| ... | ... | ... |
| 276 | 0.528359 | 0.470481 |
| 277 | 0.440618 | 0.482079 |
| 278 | 0.834572 | 0.534503 |
| 279 | 0.534751 | 0.511791 |
| 280 | 0.569121 | 0.473284 |
281 rows × 2 columns
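For a like-for-like comparison with the models above, the same metrics can be computed from these arrays; a minimal sketch (these values were not printed in the original run):
round(mean_squared_error(Y_3_test_values, predict_values), 6)
round(r2_score(Y_3_test_values, predict_values), 6)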
Let us try the same network on the dataset where the outliers were replaced with medians (note that calling fit again continues training from the already trained weights, which is why the loss starts low in the log below):
history_median = n_model.fit(X_3_train_median, Y_3_train_median, epochs=100, validation_split = 0.3)
Epoch 1/100
16/16 [==============================] - 0s 6ms/step - loss: 0.0347 - mse: 0.0347 - val_loss: 0.0325 - val_mse: 0.0325
Epoch 2/100
16/16 [==============================] - 0s 4ms/step - loss: 0.0346 - mse: 0.0346 - val_loss: 0.0324 - val_mse: 0.0324
...
Epoch 80/100
16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325
Epoch 81/100
16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss:
0.0324 - val_mse: 0.0324 Epoch 82/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0343 - mse: 0.0343 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 83/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 84/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 85/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0326 - val_mse: 0.0326 Epoch 86/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0343 - mse: 0.0343 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 87/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0343 - mse: 0.0343 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 88/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 89/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0341 - mse: 0.0341 - val_loss: 0.0329 - val_mse: 0.0329 Epoch 90/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0327 - val_mse: 0.0327 Epoch 91/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0326 - val_mse: 0.0326 Epoch 92/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0326 - val_mse: 0.0326 Epoch 93/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0326 - val_mse: 0.0326 Epoch 94/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 95/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 96/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 97/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0325 - val_mse: 0.0325 Epoch 98/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0326 - val_mse: 0.0326 Epoch 99/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0327 - val_mse: 0.0327 Epoch 100/100 16/16 [==============================] - 0s 3ms/step - loss: 0.0342 - mse: 0.0342 - val_loss: 0.0328 - val_mse: 0.0328
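The validation loss plateaus within the first ten epochs, so most of the 100 epochs add no value. A minimal sketch of how the run could be shortened with an EarlyStopping callback; the training arrays X_3_train_median / Y_3_train_median are hypothetical names mirroring the test arrays used below, and n_model is assumed to be the compiled network from the cell above:

from tensorflow import keras

# Stop when val_loss has not improved for 10 epochs,
# and roll back to the weights of the best epoch seen.
early_stop = keras.callbacks.EarlyStopping(
    monitor="val_loss",
    patience=10,
    restore_best_weights=True,
)

history = n_model.fit(
    X_3_train_median, Y_3_train_median,  # assumed training split
    validation_split=0.2,
    epochs=100,
    callbacks=[early_stop],
    verbose=0,
)
print("stopped after", len(history.history["loss"]), "epochs")

With restore_best_weights=True the model keeps the best validation-epoch weights rather than whatever the last epoch produced, which matters when val_loss is as noisy as in the log above.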
n_model.evaluate(X_3_test_median, Y_3_test_median, verbose=1)
10/10 [==============================] - 0s 1ms/step - loss: 0.0366 - mse: 0.0366
[0.036648865789175034, 0.036648865789175034]
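evaluate() returns a list [loss, metric]; since the loss here is itself MSE, the two numbers coincide. To compare the network with the sklearn regressors fitted earlier, it may help to report R² and MAE on the same test set. A minimal sketch, computed in the MinMaxScaler-normalized space (the same space as the MSE above):

from sklearn.metrics import r2_score, mean_absolute_error

# Predict on the test split and flatten (N, 1) -> (N,)
y_pred = n_model.predict(X_3_test_median).ravel()

print("R^2 :", r2_score(Y_3_test_median, y_pred))
print("MAE :", mean_absolute_error(Y_3_test_median, y_pred))

Note that metrics on scaled targets are not directly interpretable in physical units; to report MAE in the original units, the predictions and targets would first have to be passed through the scaler's inverse_transform.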